library(NNbenchmark)
library(ANN2)
library(stringr)
library(dplyr)
library(kableExtra)
## Set Environment
## Avoid scientific notation when numeric results are printed.
options(scipen = 9999)
## Keep two decimal places on the seconds field of timestamps.
options("digits.secs" = 2)
## Shared benchmark timer; verbose = FALSE suppresses per-event console messages.
timer <- createTimer(verbose = FALSE)
## Datasets to Test
## Print a tabular overview of the benchmark datasets shipped with NNbenchmark.
NNdataSummary(NNdatasets)
## NN Train Function
#' Benchmark hyper-parameters for a given ANN2 optimiser.
#'
#' @param optim_method One of "sgd", "adam", "rmsprop"; anything else stops.
#' @return list(iter = epochs, lr = learning rate, params = label string).
hyperParams <- function(optim_method) {
    ## One settings table instead of three if-branches each rebuilding the
    ## same `params` string (the original also rebuilt it a fourth time,
    ## making the in-branch assignments dead code).
    settings <- list(
        sgd     = list(iter = 10, lr = 0.01),
        adam    = list(iter = 15, lr = 0.02),
        rmsprop = list(iter = 20, lr = 0.03)
    )
    if (!is.element(optim_method, names(settings))) stop("Invalid Parameters.")

    iter <- settings[[optim_method]]$iter
    lr   <- settings[[optim_method]]$lr
    params <- paste0("method=", optim_method, "_", "lr=", lr, "_", "iter=", iter)

    list(iter = iter, lr = lr, params = params)
}
#' Train a single-hidden-layer ANN2 regression network.
#'
#' @param x Predictors (data.frame, pre-scaled upstream).
#' @param y Response (numeric vector, pre-scaled upstream).
#' @param hidden_neur Number of neurons in the hidden layer.
#' @param optim_method Optimiser name, forwarded to hyperParams().
#' @return The fitted ANN2 neuralnetwork object.
NNtrain <- function(x, y, hidden_neur, optim_method) {
    ## Resolve epochs and learning rate for this optimiser.
    hp <- hyperParams(optim_method)

    ## Data are already scaled by the caller, hence standardize = FALSE;
    ## val.prop = 0 disables the validation split.
    model <- neuralnetwork(X = x, y = y,
                           val.prop = 0,
                           standardize = FALSE,
                           hidden.layers = c(hidden_neur),
                           regression = TRUE,
                           loss.type = "squared",
                           n.epochs = hp$iter,
                           optim.type = optim_method,
                           learn.rates = hp$lr,
                           verbose = FALSE,
                           random.seed = as.integer(runif(1) * 10000000))
    model
}
## Main Loop
for (dset in names(NNdatasets)) {

    ## =============================================
    ## EXTRACT INFORMATION FROM THE SELECTED DATASET
    ## =============================================
    ds     <- NNdatasets[[dset]]$ds
    Z      <- NNdatasets[[dset]]$Z
    neur   <- NNdatasets[[dset]]$neur
    nparNN <- NNdatasets[[dset]]$nparNN
    fmlaNN <- NNdatasets[[dset]]$fmlaNN
    donotremove  <- c("dset", "dsets", "ds", "Z", "neur", "TF", "nrep", "timer",
                      "donotremove", "donotremove2")
    donotremove2 <- c("dset", "dsets")

    ## ===================================================
    ## SELECT THE FORMAT REQUIRED BY THE PACKAGE/ALGORITHM
    ## d = data.frame, m = matrix, v = vector/numeric
    ## ATTACH THE OBJECTS CREATED (x, y, Zxy, ym0, ysd0, ...)
    ## ===================================================
    ZZ <- prepareZZ(Z, xdmv = "d", ydmv = "v", zdm = "d", scale = TRUE)
    attach(ZZ)

    ## =============================================
    ## nrep => NUMBER OF INDEPENDENT RUNS PER METHOD
    ## TF   => PLOT THE RESULTS
    ## =============================================
    nrep <- 5
    TF   <- TRUE

    method <- c("sgd", "adam", "rmsprop")

    for (m in method) {

        descr <- paste(dset, "ANN2::neuralnetwork", m, sep = "_")

        ## AUTO
        Ypred <- list()
        Rmse  <- numeric(length = nrep)
        Mae   <- numeric(length = nrep)

        for (i in seq_len(nrep)) {
            event <- paste0(descr, sprintf("_%.2d", i))
            timer$start(event)
            #### ADJUST THE FOLLOWING LINES TO THE PACKAGE::ALGORITHM
            hyper_params <- hyperParams(optim_method = m)

            ## If training fails, fall back to a null linear model so the run
            ## still yields predictions and a (poor) RMSE.
            NNreg <- tryCatch(
                NNtrain(x = x, y = y, hidden_neur = neur, optim_method = m),
                error = function(e) lm(y ~ 0, data = Zxy)
            )
            ## Un-scale predictions back to the original response scale.
            ## FIX: tryCatch handlers must be functions; the original
            ## `error = ym0` would itself error on failure.
            y_pred <- tryCatch(
                ym0 + ysd0 * predict(NNreg, x)$predictions,
                error = function(e) ym0
            )
            ####
            Ypred[[i]] <- y_pred
            Rmse[i]    <- funRMSE(y_pred, y0)
            Mae[i]     <- funMAE(y_pred, y0)
            timer$stop(event, RMSE = Rmse[i], MAE = Mae[i],
                       params = hyper_params$params, printmsg = FALSE)
        }
        ## Index of the best run; [1] breaks ties deterministically.
        best <- which(Rmse == min(Rmse, na.rm = TRUE))[1]

        ## ================================================
        ## PLOT ALL MODELS AND THE MODEL WITH THE BEST RMSE
        ## par OPTIONS CAN BE IMPROVED FOR A BETTER DISPLAY
        ## ================================================
        op <- par(mfcol = c(1, 2))
        plotNN(xory, y0, uni, TF, main = descr)
        for (i in seq_len(nrep)) lipoNN(xory, Ypred[[i]], uni, TF, col = i, lwd = 1)
        plotNN(xory, y0, uni, TF, main = descr)
        lipoNN(xory, Ypred[[best]], uni, TF, col = 4, lwd = 4)
        par(op)
    }

    ## ===========================
    ## DETACH ZZ - END OF THE LOOP
    ## ===========================
    detach(ZZ)
}




































## Results
## Raw timer log: one row per recorded event.
dfr0 <- getTimer(timer)

## Split the event label "<dataset>_<pkg::fun>_<method>_<run>" into columns,
## next to the elapsed time and error metrics. All stringr calls are
## namespace-qualified for consistency (the original mixed bare `fixed`).
dfr <- data.frame(
    ds_pkg.fun_algo = stringr::str_sub(dfr0[, 1], 1, -4),
    run     = stringr::str_sub(dfr0[, 1], -2, -1),
    dataset = stringr::str_replace_all(
        stringr::str_extract(dfr0[, 1], pattern = "^\\w*_"),
        stringr::fixed("_"), ""),
    method  = stringr::str_replace_all(
        stringr::str_extract(dfr0[, 1], pattern = "_\\w*_"),
        stringr::fixed("_"), ""),
    Elapsed = round(dfr0[, 4], 5),
    params  = dfr0$params,
    dfr0[, c("RMSE", "MAE")]
)
dfr
## Best Results
## Per dataset/method: best and mean RMSE and mean elapsed time, rendered as
## a styled HTML table. ungroup() added so no grouped tibble leaks onward.
dfr %>%
    group_by(dataset, method) %>%
    summarise(minRMSE = min(RMSE), meanRMSE = mean(RMSE), meanTime = mean(Elapsed)) %>%
    ungroup() %>%
    kable() %>%
    kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive")) %>%
    collapse_rows(columns = 1:2, valign = "top")
| dataset   | method  | minRMSE | meanRMSE | meanTime |
|-----------|---------|--------:|---------:|---------:|
| mDette    | adam    |  3.0948 |  3.97626 |    0.008 |
| mDette    | rmsprop |  2.8776 |  3.36848 |    0.010 |
| mDette    | sgd     |  4.9184 |  6.50956 |    0.028 |
| mFriedman | adam    |  0.0808 |  0.11418 |    0.012 |
| mFriedman | rmsprop |  0.0591 |  0.09248 |    0.010 |
| mFriedman | sgd     |  0.1261 |  0.13998 |    0.010 |
| mIshigami | adam    |  2.7179 |  2.76180 |    0.014 |
| mIshigami | rmsprop |  2.3168 |  2.67236 |    0.020 |
| mIshigami | sgd     |  2.9226 |  3.07040 |    0.008 |
| mRef153   | adam    |  6.0301 |  7.10828 |    0.010 |
| mRef153   | rmsprop |  4.6908 |  5.95992 |    0.006 |
| mRef153   | sgd     |  7.5511 |  8.13686 |    0.006 |
| uDmod1    | adam    |  0.5276 |  0.57870 |    0.006 |
| uDmod1    | rmsprop |  0.4809 |  0.51358 |    0.008 |
| uDmod1    | sgd     |  0.5673 |  0.69250 |    0.006 |
| uDmod2    | adam    |  0.3298 |  0.48506 |    0.004 |
| uDmod2    | rmsprop |  0.3885 |  0.44276 |    0.006 |
| uDmod2    | sgd     |  0.4324 |  0.48176 |    0.010 |
| uDreyfus1 | adam    |  0.6140 |  0.68476 |    0.004 |
| uDreyfus1 | rmsprop |  0.4876 |  0.59500 |    0.000 |
| uDreyfus1 | sgd     |  0.6968 |  0.80534 |    0.002 |
| uDreyfus2 | adam    |  0.6809 |  0.90494 |    0.004 |
| uDreyfus2 | rmsprop |  0.4592 |  0.52160 |    0.000 |
| uDreyfus2 | sgd     |  0.6762 |  0.73370 |    0.004 |
| uGauss1   | adam    | 26.7491 | 27.76072 |    0.010 |
| uGauss1   | rmsprop | 22.7846 | 25.61874 |    0.004 |
| uGauss1   | sgd     | 27.5268 | 28.25596 |    0.004 |
| uGauss2   | adam    | 19.2962 | 21.46538 |    0.008 |
| uGauss2   | rmsprop | 13.2070 | 14.95164 |    0.016 |
| uGauss2   | sgd     | 17.3750 | 23.07154 |    0.002 |
| uGauss3   | adam    | 17.2794 | 20.72808 |    0.004 |
| uGauss3   | rmsprop | 12.5749 | 14.10714 |    0.004 |
| uGauss3   | sgd     | 23.3915 | 27.76456 |    0.006 |
| uNeuroOne | adam    |  0.9157 |  1.02970 |    0.002 |
| uNeuroOne | rmsprop |  0.8519 |  0.95704 |    0.004 |
| uNeuroOne | sgd     |  0.9877 |  1.09072 |    0.002 |
## Clean up objects created during the benchmark; presumably keeps the names
## listed in donotremove — see NNbenchmark::clearNN for the exact contract.
clearNN(donotremove)
---
title: "NNbenchmark | ANN2"
author: Akshaj Verma
output: html_notebook
---


```{r message=FALSE, warning=FALSE}
library(NNbenchmark)
library(ANN2)

library(stringr)
library(dplyr)
library(kableExtra)
```

## Set Environment
```{r}
## Avoid scientific notation when numeric results are printed.
options(scipen = 9999)
## Keep two decimal places on the seconds field of timestamps.
options("digits.secs" = 2)
## Shared benchmark timer; verbose = FALSE suppresses per-event console messages.
timer  <- createTimer(verbose = FALSE)
```

## Datasets to Test

```{r}
## Print a tabular overview of the benchmark datasets shipped with NNbenchmark.
NNdataSummary(NNdatasets)
```

## NN Train Function
```{r}
#' Benchmark hyper-parameters for a given ANN2 optimiser.
#'
#' @param optim_method One of "sgd", "adam", "rmsprop"; anything else stops.
#' @return list(iter = epochs, lr = learning rate, params = label string).
hyperParams <- function(optim_method) {
    ## One settings table instead of three if-branches each rebuilding the
    ## same `params` string (the original also rebuilt it a fourth time,
    ## making the in-branch assignments dead code).
    settings <- list(
        sgd     = list(iter = 10, lr = 0.01),
        adam    = list(iter = 15, lr = 0.02),
        rmsprop = list(iter = 20, lr = 0.03)
    )
    if (!is.element(optim_method, names(settings))) stop("Invalid Parameters.")

    iter <- settings[[optim_method]]$iter
    lr   <- settings[[optim_method]]$lr
    params <- paste0("method=", optim_method, "_", "lr=", lr, "_", "iter=", iter)

    list(iter = iter, lr = lr, params = params)
}



#' Train a single-hidden-layer ANN2 regression network.
#'
#' @param x Predictors (data.frame, pre-scaled upstream).
#' @param y Response (numeric vector, pre-scaled upstream).
#' @param hidden_neur Number of neurons in the hidden layer.
#' @param optim_method Optimiser name, forwarded to hyperParams().
#' @return The fitted ANN2 neuralnetwork object.
NNtrain <- function(x, y, hidden_neur, optim_method) {
    ## Resolve epochs and learning rate for this optimiser.
    hp <- hyperParams(optim_method)

    ## Data are already scaled by the caller, hence standardize = FALSE;
    ## val.prop = 0 disables the validation split.
    model <- neuralnetwork(X = x, y = y,
                           val.prop = 0,
                           standardize = FALSE,
                           hidden.layers = c(hidden_neur),
                           regression = TRUE,
                           loss.type = "squared",
                           n.epochs = hp$iter,
                           optim.type = optim_method,
                           learn.rates = hp$lr,
                           verbose = FALSE,
                           random.seed = as.integer(runif(1) * 10000000))
    model
}
```


## Main Loop
```{r fig.height=5, fig.width=14}
for (dset in names(NNdatasets)) {

    ## =============================================
    ## EXTRACT INFORMATION FROM THE SELECTED DATASET
    ## =============================================
    ds     <- NNdatasets[[dset]]$ds
    Z      <- NNdatasets[[dset]]$Z
    neur   <- NNdatasets[[dset]]$neur
    nparNN <- NNdatasets[[dset]]$nparNN
    fmlaNN <- NNdatasets[[dset]]$fmlaNN
    donotremove  <- c("dset", "dsets", "ds", "Z", "neur", "TF", "nrep", "timer",
                      "donotremove", "donotremove2")
    donotremove2 <- c("dset", "dsets")

    ## ===================================================
    ## SELECT THE FORMAT REQUIRED BY THE PACKAGE/ALGORITHM
    ## d = data.frame, m = matrix, v = vector/numeric
    ## ATTACH THE OBJECTS CREATED (x, y, Zxy, ym0, ysd0, ...)
    ## ===================================================
    ZZ <- prepareZZ(Z, xdmv = "d", ydmv = "v", zdm = "d", scale = TRUE)
    attach(ZZ)

    ## =============================================
    ## nrep => NUMBER OF INDEPENDENT RUNS PER METHOD
    ## TF   => PLOT THE RESULTS
    ## =============================================
    nrep <- 5
    TF   <- TRUE

    method <- c("sgd", "adam", "rmsprop")

    for (m in method) {

        descr <- paste(dset, "ANN2::neuralnetwork", m, sep = "_")

        ## AUTO
        Ypred <- list()
        Rmse  <- numeric(length = nrep)
        Mae   <- numeric(length = nrep)

        for (i in seq_len(nrep)) {
            event <- paste0(descr, sprintf("_%.2d", i))
            timer$start(event)
            #### ADJUST THE FOLLOWING LINES TO THE PACKAGE::ALGORITHM
            hyper_params <- hyperParams(optim_method = m)

            ## If training fails, fall back to a null linear model so the run
            ## still yields predictions and a (poor) RMSE.
            NNreg <- tryCatch(
                NNtrain(x = x, y = y, hidden_neur = neur, optim_method = m),
                error = function(e) lm(y ~ 0, data = Zxy)
            )
            ## Un-scale predictions back to the original response scale.
            ## FIX: tryCatch handlers must be functions; the original
            ## `error = ym0` would itself error on failure.
            y_pred <- tryCatch(
                ym0 + ysd0 * predict(NNreg, x)$predictions,
                error = function(e) ym0
            )
            ####
            Ypred[[i]] <- y_pred
            Rmse[i]    <- funRMSE(y_pred, y0)
            Mae[i]     <- funMAE(y_pred, y0)
            timer$stop(event, RMSE = Rmse[i], MAE = Mae[i],
                       params = hyper_params$params, printmsg = FALSE)
        }
        ## Index of the best run; [1] breaks ties deterministically.
        best <- which(Rmse == min(Rmse, na.rm = TRUE))[1]

        ## ================================================
        ## PLOT ALL MODELS AND THE MODEL WITH THE BEST RMSE
        ## par OPTIONS CAN BE IMPROVED FOR A BETTER DISPLAY
        ## ================================================
        op <- par(mfcol = c(1, 2))
        plotNN(xory, y0, uni, TF, main = descr)
        for (i in seq_len(nrep)) lipoNN(xory, Ypred[[i]], uni, TF, col = i, lwd = 1)
        plotNN(xory, y0, uni, TF, main = descr)
        lipoNN(xory, Ypred[[best]], uni, TF, col = 4, lwd = 4)
        par(op)
    }

    ## ===========================
    ## DETACH ZZ - END OF THE LOOP
    ## ===========================
    detach(ZZ)
}
```


## Results

```{r}
## Raw timer log: one row per recorded event.
dfr0 <- getTimer(timer)

## Split the event label "<dataset>_<pkg::fun>_<method>_<run>" into columns,
## next to the elapsed time and error metrics. All stringr calls are
## namespace-qualified for consistency (the original mixed bare `fixed`).
dfr <- data.frame(
    ds_pkg.fun_algo = stringr::str_sub(dfr0[, 1], 1, -4),
    run     = stringr::str_sub(dfr0[, 1], -2, -1),
    dataset = stringr::str_replace_all(
        stringr::str_extract(dfr0[, 1], pattern = "^\\w*_"),
        stringr::fixed("_"), ""),
    method  = stringr::str_replace_all(
        stringr::str_extract(dfr0[, 1], pattern = "_\\w*_"),
        stringr::fixed("_"), ""),
    Elapsed = round(dfr0[, 4], 5),
    params  = dfr0$params,
    dfr0[, c("RMSE", "MAE")]
)
dfr
```

## Best Results

```{r}
## Per dataset/method: best and mean RMSE and mean elapsed time, rendered as
## a styled HTML table. ungroup() added so no grouped tibble leaks onward.
dfr %>%
    group_by(dataset, method) %>%
    summarise(minRMSE = min(RMSE), meanRMSE = mean(RMSE), meanTime = mean(Elapsed)) %>%
    ungroup() %>%
    kable() %>%
    kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive")) %>%
    collapse_rows(columns = 1:2, valign = "top")
```

```{r}
## Clean up objects created during the benchmark; presumably keeps the names
## listed in donotremove — see NNbenchmark::clearNN for the exact contract.
clearNN(donotremove)
```

